import warnings
warnings.filterwarnings("ignore")
import pandas as pd
import scipy.stats as stats
import numpy as np
from pandas.api.types import is_numeric_dtype
import statsmodels.api as sm
import seaborn as sns
from seaborn_qqplot import QQPlot
from matplotlib import pyplot as plt
from scipy.stats import gamma
from sklearn.preprocessing import StandardScaler
import pickle
from sklearn.metrics import mean_squared_error
#-------Importing tensorflow libraries-------#
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.wrappers.scikit_learn import KerasRegressor
#------split and pipeline libraries------#
from sklearn.model_selection import train_test_split
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.pipeline import Pipeline
#--------------------------------------------------#
from statsmodels.stats.outliers_influence import variance_inflation_factor
sns.set(color_codes=True)
%matplotlib inline
df = pd.read_csv("Part- 1,2&3 - Signal.csv")
df.head()
| | Parameter 1 | Parameter 2 | Parameter 3 | Parameter 4 | Parameter 5 | Parameter 6 | Parameter 7 | Parameter 8 | Parameter 9 | Parameter 10 | Parameter 11 | Signal_Strength |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 7.4 | 0.70 | 0.00 | 1.9 | 0.076 | 11.0 | 34.0 | 0.9978 | 3.51 | 0.56 | 9.4 | 5 |
| 1 | 7.8 | 0.88 | 0.00 | 2.6 | 0.098 | 25.0 | 67.0 | 0.9968 | 3.20 | 0.68 | 9.8 | 5 |
| 2 | 7.8 | 0.76 | 0.04 | 2.3 | 0.092 | 15.0 | 54.0 | 0.9970 | 3.26 | 0.65 | 9.8 | 5 |
| 3 | 11.2 | 0.28 | 0.56 | 1.9 | 0.075 | 17.0 | 60.0 | 0.9980 | 3.16 | 0.58 | 9.8 | 6 |
| 4 | 7.4 | 0.70 | 0.00 | 1.9 | 0.076 | 11.0 | 34.0 | 0.9978 | 3.51 | 0.56 | 9.4 | 5 |
df.isnull().any()
Parameter 1 False Parameter 2 False Parameter 3 False Parameter 4 False Parameter 5 False Parameter 6 False Parameter 7 False Parameter 8 False Parameter 9 False Parameter 10 False Parameter 11 False Signal_Strength False dtype: bool
df.info(verbose=True)
<class 'pandas.core.frame.DataFrame'> RangeIndex: 1599 entries, 0 to 1598 Data columns (total 12 columns): Parameter 1 1599 non-null float64 Parameter 2 1599 non-null float64 Parameter 3 1599 non-null float64 Parameter 4 1599 non-null float64 Parameter 5 1599 non-null float64 Parameter 6 1599 non-null float64 Parameter 7 1599 non-null float64 Parameter 8 1599 non-null float64 Parameter 9 1599 non-null float64 Parameter 10 1599 non-null float64 Parameter 11 1599 non-null float64 Signal_Strength 1599 non-null int64 dtypes: float64(11), int64(1) memory usage: 150.0 KB
df['Signal_Strength'] = df['Signal_Strength'].astype('object')
df.info(verbose=True)
<class 'pandas.core.frame.DataFrame'> RangeIndex: 1599 entries, 0 to 1598 Data columns (total 12 columns): Parameter 1 1599 non-null float64 Parameter 2 1599 non-null float64 Parameter 3 1599 non-null float64 Parameter 4 1599 non-null float64 Parameter 5 1599 non-null float64 Parameter 6 1599 non-null float64 Parameter 7 1599 non-null float64 Parameter 8 1599 non-null float64 Parameter 9 1599 non-null float64 Parameter 10 1599 non-null float64 Parameter 11 1599 non-null float64 Signal_Strength 1599 non-null object dtypes: float64(11), object(1) memory usage: 150.0+ KB
df.describe()
| | Parameter 1 | Parameter 2 | Parameter 3 | Parameter 4 | Parameter 5 | Parameter 6 | Parameter 7 | Parameter 8 | Parameter 9 | Parameter 10 | Parameter 11 |
|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 | 1599.000000 |
| mean | 8.319637 | 0.527821 | 0.270976 | 2.538806 | 0.087467 | 15.874922 | 46.467792 | 0.996747 | 3.311113 | 0.658149 | 10.422983 |
| std | 1.741096 | 0.179060 | 0.194801 | 1.409928 | 0.047065 | 10.460157 | 32.895324 | 0.001887 | 0.154386 | 0.169507 | 1.065668 |
| min | 4.600000 | 0.120000 | 0.000000 | 0.900000 | 0.012000 | 1.000000 | 6.000000 | 0.990070 | 2.740000 | 0.330000 | 8.400000 |
| 25% | 7.100000 | 0.390000 | 0.090000 | 1.900000 | 0.070000 | 7.000000 | 22.000000 | 0.995600 | 3.210000 | 0.550000 | 9.500000 |
| 50% | 7.900000 | 0.520000 | 0.260000 | 2.200000 | 0.079000 | 14.000000 | 38.000000 | 0.996750 | 3.310000 | 0.620000 | 10.200000 |
| 75% | 9.200000 | 0.640000 | 0.420000 | 2.600000 | 0.090000 | 21.000000 | 62.000000 | 0.997835 | 3.400000 | 0.730000 | 11.100000 |
| max | 15.900000 | 1.580000 | 1.000000 | 15.500000 | 0.611000 | 72.000000 | 289.000000 | 1.003690 | 4.010000 | 2.000000 | 14.900000 |
columns = list(df.drop(columns=['Signal_Strength']).columns)
for column in columns:
plt.figure(figsize=(10, 5))
sns.distplot(df[column], color = "blue").set_title("Distribution of "+column)
for column in columns:
plt.figure(figsize=(10, 5))
sns.boxplot(x='Signal_Strength',y=column, data = df, hue = 'Signal_Strength')
plt.figure(figsize=(10, 5))
sns.countplot(y='Signal_Strength',hue = 'Signal_Strength', data = df)
<matplotlib.axes._subplots.AxesSubplot at 0x33b31408>
for column in columns:
print(100*"*")
print("Mean of "+str(column)+"="+str(df[column].mean()))
print("Median of "+str(column)+"="+str(df[column].median()))
print("Mode of "+str(column)+"="+str(df[column].mode()[0]))
print("Skewness in "+str(column)+"="+str(df[column].skew()))
print("Excess Kurtosis in "+str(column)+"="+str(df[column].kurtosis()))
print(100*"*")
| Parameter | Mean | Median | Mode | Skewness | Excess Kurtosis |
|---|---|---|---|---|---|
| Parameter 1 | 8.3196 | 7.9 | 7.2 | 0.9828 | 1.1321 |
| Parameter 2 | 0.5278 | 0.52 | 0.6 | 0.6716 | 1.2255 |
| Parameter 3 | 0.2710 | 0.26 | 0.0 | 0.3183 | -0.7890 |
| Parameter 4 | 2.5388 | 2.2 | 2.0 | 4.5407 | 28.6176 |
| Parameter 5 | 0.0875 | 0.079 | 0.08 | 5.6803 | 41.7158 |
| Parameter 6 | 15.8749 | 14.0 | 6.0 | 1.2506 | 2.0236 |
| Parameter 7 | 46.4678 | 38.0 | 28.0 | 1.5155 | 3.8098 |
| Parameter 8 | 0.9967 | 0.99675 | 0.9972 | 0.0713 | 0.9341 |
| Parameter 9 | 3.3111 | 3.31 | 3.3 | 0.1937 | 0.8069 |
| Parameter 10 | 0.6581 | 0.62 | 0.6 | 2.4287 | 11.7203 |
| Parameter 11 | 10.4230 | 10.2 | 9.5 | 0.8608 | 0.2000 |
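The same per-column summary can be produced in a single call with pandas aggregation; a minimal sketch using the `columns` list defined earlier (the mode is added separately because `agg` would return every mode).
# Compact alternative to the loop above: one summary row per parameter
summary = df[columns].agg(['mean', 'median', 'skew', 'kurt']).T
summary['mode'] = [df[c].mode()[0] for c in columns]
summary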
Observations: Parameters 4, 5 and 10 are strongly right-skewed with very heavy tails (excess kurtosis of roughly 28.6, 41.7 and 11.7 respectively), so they are the most likely to carry outliers; the remaining parameters are at most mildly skewed, and Parameter 8 is close to symmetric.
sns.pairplot(df,hue='Signal_Strength')
<seaborn.axisgrid.PairGrid at 0x34cad388>
Observation: The pairplot suggests some correlation between Parameter 1 and Parameter 8, but we need to check the correlation statistically.
df1 = df.copy()
# Outlier treatment: any value whose z-score exceeds 3 is replaced with the column median
# (a one-sided rule, aimed at the long right tails seen above)
for column in df1.drop(columns=['Signal_Strength']).columns:
    df1[column] = df1[column].apply(lambda x: x if
                                    ((x - df1[column].mean()) / df1[column].std()) <= 3
                                    else
                                    df1[column].median()
                                    # np.nan
                                    )
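The same replacement can be written without `apply`, using a vectorized z-score mask; a minimal sketch (`df1_alt` is a hypothetical name) that keeps the notebook's one-sided 3-sigma rule.
# Vectorized equivalent of the loop above
df1_alt = df.copy()
for column in df1_alt.drop(columns=['Signal_Strength']).columns:
    z = (df1_alt[column] - df1_alt[column].mean()) / df1_alt[column].std()
    df1_alt[column] = df1_alt[column].where(z <= 3, df1_alt[column].median())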
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
df1.shape
(1599, 12)
X = df1.drop(columns=['Signal_Strength'])
y = df1['Signal_Strength'].astype('int')
ordered_rank_features = SelectKBest(score_func=chi2, k=11)
ordered_rank_features.fit(X, y)
SelectKBest(k=11, score_func=<function chi2 at 0x00000000504C93A8>)
scores = pd.DataFrame(ordered_rank_features.scores_, columns=['Scores'])
features = pd.DataFrame(X.columns, columns=['Features'])
final = pd.concat([scores,features], axis=1)
final.nlargest(11,'Scores')
| | Scores | Features |
|---|---|---|
| 6 | 2761.106769 | Parameter 7 |
| 5 | 120.932364 | Parameter 6 |
| 10 | 43.476995 | Parameter 11 |
| 2 | 13.614614 | Parameter 3 |
| 1 | 12.224059 | Parameter 2 |
| 0 | 9.455325 | Parameter 1 |
| 3 | 7.156913 | Parameter 4 |
| 9 | 5.830052 | Parameter 10 |
| 4 | 0.281285 | Parameter 5 |
| 8 | 0.126300 | Parameter 9 |
| 7 | 0.000214 | Parameter 8 |
Kbest_features = df1.drop(columns=['Parameter 5','Parameter 9','Parameter 8']).copy()
from sklearn.ensemble import ExtraTreesClassifier
model = ExtraTreesClassifier()
model.fit(X,y)
ExtraTreesClassifier(bootstrap=False, ccp_alpha=0.0, class_weight=None,
criterion='gini', max_depth=None, max_features='auto',
max_leaf_nodes=None, max_samples=None,
min_impurity_decrease=0.0, min_impurity_split=None,
min_samples_leaf=1, min_samples_split=2,
min_weight_fraction_leaf=0.0, n_estimators=100,
n_jobs=None, oob_score=False, random_state=None, verbose=0,
warm_start=False)
ranked_features = pd.Series(model.feature_importances_,index=X.columns)
ranked_features.nlargest(10).plot(kind='barh')
<matplotlib.axes._subplots.AxesSubplot at 0x72ec7288>
X = df.drop(columns=['Signal_Strength'])
vif_data = pd.DataFrame()
vif_data['feature'] = X.columns
vif_data['VIF'] = [variance_inflation_factor(X.values,i) for i in range(len(X.columns))]
vif_data
| | feature | VIF |
|---|---|---|
| 0 | Parameter 1 | 74.452265 |
| 1 | Parameter 2 | 17.060026 |
| 2 | Parameter 3 | 9.183495 |
| 3 | Parameter 4 | 4.662992 |
| 4 | Parameter 5 | 6.554877 |
| 5 | Parameter 6 | 6.442682 |
| 6 | Parameter 7 | 6.519699 |
| 7 | Parameter 8 | 1479.287209 |
| 8 | Parameter 9 | 1070.967685 |
| 9 | Parameter 10 | 21.590621 |
| 10 | Parameter 11 | 124.394866 |
sns.pairplot(df1)
<seaborn.axisgrid.PairGrid at 0x72f09548>
X = df1.drop(columns=['Signal_Strength'])
vif_data = pd.DataFrame()
vif_data['feature'] = X.columns
vif_data['VIF'] = [variance_inflation_factor(X.values,i) for i in range(len(X.columns))]
vif_data
| | feature | VIF |
|---|---|---|
| 0 | Parameter 1 | 67.525613 |
| 1 | Parameter 2 | 17.700405 |
| 2 | Parameter 3 | 8.259058 |
| 3 | Parameter 4 | 9.553091 |
| 4 | Parameter 5 | 16.211930 |
| 5 | Parameter 6 | 6.542079 |
| 6 | Parameter 7 | 6.370576 |
| 7 | Parameter 8 | 1331.472877 |
| 8 | Parameter 9 | 960.386214 |
| 9 | Parameter 10 | 29.009573 |
| 10 | Parameter 11 | 133.234695 |
Observations: Even after outlier treatment, the VIF values for Parameter 8 (~1331) and Parameter 9 (~960) remain extremely high, and Parameter 11 is above 130, indicating severe multicollinearity among the predictors. This motivates standardizing the features and reducing dimensionality with PCA below.
# Standardize (z-score) every predictor before PCA
scaled = df1.copy()
for column in scaled.drop(columns=['Signal_Strength']).columns:
    scaled[column] = scaled[column].apply(lambda x:
                                          (x - scaled[column].mean()) / scaled[column].std()
                                          )
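An equivalent, shorter scaling step could use the StandardScaler imported earlier; a sketch only (`scaled_alt` is a hypothetical name), with the caveat that StandardScaler divides by the population standard deviation (ddof=0) while pandas' `.std()` uses ddof=1, so the values differ very slightly.
# Roughly equivalent scaling with scikit-learn
scaled_alt = df1.copy()
num_cols = scaled_alt.drop(columns=['Signal_Strength']).columns
scaled_alt[num_cols] = StandardScaler().fit_transform(scaled_alt[num_cols])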
from sklearn.decomposition import PCA
features = list(scaled.drop(columns=['Signal_Strength']).columns)
pca = PCA(n_components=10)
pca.fit(np.array(scaled[features]))
variance = pca.explained_variance_ratio_
var = np.cumsum(np.round(variance,decimals = 3)*100)
var
array([27.4, 45.1, 59.4, 69.7, 77.9, 84.7, 89.9, 94. , 97. , 99.1])
plt.ylabel("% Variance explained")
plt.xlabel("Number of principal components")
plt.title("PCA analysis")
plt.ylim(20, 110)
plt.xlim(0, 11)
plt.plot(range(1, len(var) + 1), var)  # x-axis = number of components (1 to 10)
[<matplotlib.lines.Line2D at 0xfebe1708>]
Observations: We could pick n_components = 6, which accounts for roughly 85% of the variance in the dataset (the cumulative curve above only reaches ~90% at 7 components).
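The component count for any variance threshold can also be read off the cumulative array programmatically; a small sketch using the `var` array computed above.
# Smallest number of components whose cumulative explained variance reaches a threshold
for threshold in (80, 90, 95):
    n = int(np.argmax(var >= threshold)) + 1
    print(f"{n} components reach {threshold}% (cumulative = {var[n - 1]}%)")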
pca = PCA(n_components=6)
principalComponents = pca.fit_transform(np.array(scaled[features]))
pcDf = pd.DataFrame(data = principalComponents
, columns = ['PC1', 'PC2','PC3','PC4','PC5','PC6'])
finalDf = pd.concat([pcDf,scaled[['Signal_Strength']]], axis=1)
finalDf.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 1599 entries, 0 to 1598 Data columns (total 7 columns): PC1 1599 non-null float64 PC2 1599 non-null float64 PC3 1599 non-null float64 PC4 1599 non-null float64 PC5 1599 non-null float64 PC6 1599 non-null float64 Signal_Strength 1599 non-null object dtypes: float64(6), object(1) memory usage: 87.6+ KB
finalDf['Signal_Strength'].unique()
array([5, 6, 7, 4, 8, 3], dtype=object)
from tensorflow.keras.utils import to_categorical
categorical_labels = to_categorical(finalDf['Signal_Strength'], num_classes=None)
len(categorical_labels[0])
9
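to_categorical one-hot encodes by integer value, so labels 3-8 produce 9 columns with the first three always zero; that is why the network below ends in a 9-unit softmax layer. If a compact 6-column encoding were preferred, one option (a sketch, not what this notebook does; the output layer would then need 6 units) is to remap the labels to 0-5 first:
# Hypothetical alternative: compact 6-class one-hot encoding
from sklearn.preprocessing import LabelEncoder

le = LabelEncoder()
compact_labels = to_categorical(le.fit_transform(finalDf['Signal_Strength'].astype(int)))
compact_labels.shape  # (1599, 6)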
finalDf
| | PC1 | PC2 | PC3 | PC4 | PC5 | PC6 | Signal_Strength |
|---|---|---|---|---|---|---|---|
| 0 | 1.663006 | -1.043868 | -1.414160 | -0.177092 | 0.475915 | -1.056528 | 5 |
| 1 | 0.846262 | -2.201520 | -0.042708 | 0.445265 | 0.145303 | 0.413887 | 5 |
| 2 | 0.742628 | -1.364816 | -0.714728 | 0.068164 | 0.255632 | 0.095856 | 5 |
| 3 | -2.433041 | 0.010059 | 0.731094 | -1.526152 | -0.687085 | -0.277802 | 6 |
| 4 | 1.663006 | -1.043868 | -1.414160 | -0.177092 | 0.475915 | -1.056528 | 5 |
| ... | ... | ... | ... | ... | ... | ... | ... |
| 1594 | 2.282332 | -0.572072 | 0.847368 | 0.008764 | 0.358428 | 0.682458 | 5 |
| 1595 | 2.437299 | 0.228083 | 2.257227 | 0.697218 | 0.739969 | -0.447815 | 6 |
| 1596 | 1.527470 | 0.345618 | 1.232464 | 0.556543 | 0.799146 | -0.068777 | 6 |
| 1597 | 2.455420 | -0.451180 | 1.099449 | 0.390267 | 1.083614 | -0.438698 | 5 |
| 1598 | 0.330841 | 1.277605 | 1.192040 | 1.086017 | -0.399542 | -0.058249 | 6 |
1599 rows × 7 columns
xpca = finalDf.drop(columns=['Signal_Strength']).values
ypca = categorical_labels
X_train, X_test, y_train, y_test = train_test_split(xpca, ypca,test_size=0.33, random_state=42)
model = Sequential()
model.add(Dense(32, input_dim=6, kernel_initializer='normal', activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(9, activation='softmax'))
model.summary()
Model: "sequential_8" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_24 (Dense) (None, 32) 224 _________________________________________________________________ dropout_16 (Dropout) (None, 32) 0 _________________________________________________________________ dense_25 (Dense) (None, 64) 2112 _________________________________________________________________ dropout_17 (Dropout) (None, 64) 0 _________________________________________________________________ dense_26 (Dense) (None, 9) 585 ================================================================= Total params: 2,921 Trainable params: 2,921 Non-trainable params: 0 _________________________________________________________________
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
history = model.fit(X_train, y_train, epochs=150, batch_size=10, verbose=1, validation_split=0.2)
Train on 856 samples, validate on 215 samples Epoch 1/150 856/856 [==============================] - 0s 403us/sample - loss: 1.8589 - acc: 0.4229 - val_loss: 1.4051 - val_acc: 0.5209 Epoch 2/150 856/856 [==============================] - 0s 120us/sample - loss: 1.3185 - acc: 0.4825 - val_loss: 1.0763 - val_acc: 0.6419 Epoch 3/150 856/856 [==============================] - 0s 124us/sample - loss: 1.1507 - acc: 0.5350 - val_loss: 1.0197 - val_acc: 0.6372 Epoch 4/150 856/856 [==============================] - 0s 120us/sample - loss: 1.1529 - acc: 0.5397 - val_loss: 0.9766 - val_acc: 0.6465 Epoch 5/150 856/856 [==============================] - 0s 182us/sample - loss: 1.1149 - acc: 0.5748 - val_loss: 0.9545 - val_acc: 0.6372 Epoch 6/150 856/856 [==============================] - 0s 132us/sample - loss: 1.1155 - acc: 0.5374 - val_loss: 0.9559 - val_acc: 0.6465 Epoch 7/150 856/856 [==============================] - 0s 204us/sample - loss: 1.0984 - acc: 0.5386 - val_loss: 0.9421 - val_acc: 0.6512 Epoch 8/150 856/856 [==============================] - 0s 124us/sample - loss: 1.0709 - acc: 0.5584 - val_loss: 0.9347 - val_acc: 0.6465 Epoch 9/150 856/856 [==============================] - 0s 123us/sample - loss: 1.0770 - acc: 0.5502 - val_loss: 0.9283 - val_acc: 0.6605 Epoch 10/150 856/856 [==============================] - 0s 119us/sample - loss: 1.0547 - acc: 0.5607 - val_loss: 0.9252 - val_acc: 0.6698 Epoch 11/150 856/856 [==============================] - 0s 131us/sample - loss: 1.0264 - acc: 0.5759 - val_loss: 0.9141 - val_acc: 0.6605 Epoch 12/150 856/856 [==============================] - 0s 122us/sample - loss: 1.0576 - acc: 0.5572 - val_loss: 0.9067 - val_acc: 0.6698 Epoch 13/150 856/856 [==============================] - 0s 133us/sample - loss: 1.0456 - acc: 0.5619 - val_loss: 0.9022 - val_acc: 0.6512 Epoch 14/150 856/856 [==============================] - 0s 120us/sample - loss: 1.0424 - acc: 0.5713 - val_loss: 0.9043 - val_acc: 0.6651 Epoch 15/150 856/856 [==============================] - 0s 134us/sample - loss: 1.0431 - acc: 0.5783 - val_loss: 0.8997 - val_acc: 0.6698 Epoch 16/150 856/856 [==============================] - 0s 126us/sample - loss: 1.0344 - acc: 0.5409 - val_loss: 0.8981 - val_acc: 0.6605 Epoch 17/150 856/856 [==============================] - 0s 122us/sample - loss: 1.0497 - acc: 0.5526 - val_loss: 0.9017 - val_acc: 0.6698 Epoch 18/150 856/856 [==============================] - 0s 117us/sample - loss: 1.0043 - acc: 0.5724 - val_loss: 0.8937 - val_acc: 0.6744 Epoch 19/150 856/856 [==============================] - 0s 118us/sample - loss: 1.0359 - acc: 0.5561 - val_loss: 0.8926 - val_acc: 0.6558 Epoch 20/150 856/856 [==============================] - 0s 133us/sample - loss: 1.0048 - acc: 0.5876 - val_loss: 0.8882 - val_acc: 0.6791 Epoch 21/150 856/856 [==============================] - 0s 126us/sample - loss: 1.0228 - acc: 0.5643 - val_loss: 0.8832 - val_acc: 0.6651 Epoch 22/150 856/856 [==============================] - 0s 131us/sample - loss: 1.0146 - acc: 0.5841 - val_loss: 0.8959 - val_acc: 0.6558 Epoch 23/150 856/856 [==============================] - 0s 126us/sample - loss: 0.9991 - acc: 0.5935 - val_loss: 0.8864 - val_acc: 0.6651 Epoch 24/150 856/856 [==============================] - 0s 126us/sample - loss: 0.9899 - acc: 0.5841 - val_loss: 0.8826 - val_acc: 0.6605 Epoch 25/150 856/856 [==============================] - 0s 154us/sample - loss: 1.0021 - acc: 0.5748 - val_loss: 0.8782 - val_acc: 0.6465 Epoch 26/150 856/856 [==============================] - 0s 
139us/sample - loss: 1.0041 - acc: 0.5864 - val_loss: 0.8828 - val_acc: 0.6651 Epoch 27/150 856/856 [==============================] - 0s 130us/sample - loss: 1.0035 - acc: 0.5701 - val_loss: 0.8859 - val_acc: 0.6372 Epoch 28/150 856/856 [==============================] - 0s 129us/sample - loss: 1.0044 - acc: 0.5689 - val_loss: 0.8832 - val_acc: 0.6465 Epoch 29/150 856/856 [==============================] - 0s 125us/sample - loss: 0.9966 - acc: 0.5794 - val_loss: 0.8864 - val_acc: 0.6651 Epoch 30/150 856/856 [==============================] - 0s 162us/sample - loss: 0.9984 - acc: 0.5829 - val_loss: 0.8831 - val_acc: 0.6512 Epoch 31/150 856/856 [==============================] - 0s 160us/sample - loss: 0.9877 - acc: 0.5818 - val_loss: 0.8798 - val_acc: 0.6605 Epoch 32/150 856/856 [==============================] - 0s 132us/sample - loss: 1.0035 - acc: 0.5771 - val_loss: 0.8785 - val_acc: 0.6465 Epoch 33/150 856/856 [==============================] - 0s 127us/sample - loss: 0.9959 - acc: 0.5724 - val_loss: 0.8823 - val_acc: 0.6558 Epoch 34/150 856/856 [==============================] - 0s 122us/sample - loss: 1.0147 - acc: 0.5724 - val_loss: 0.8821 - val_acc: 0.6558 Epoch 35/150 856/856 [==============================] - 0s 139us/sample - loss: 1.0020 - acc: 0.5876 - val_loss: 0.8830 - val_acc: 0.6744 Epoch 36/150 856/856 [==============================] - 0s 167us/sample - loss: 0.9955 - acc: 0.5864 - val_loss: 0.8791 - val_acc: 0.6744 Epoch 37/150 856/856 [==============================] - 0s 126us/sample - loss: 0.9920 - acc: 0.5794 - val_loss: 0.8703 - val_acc: 0.6651 Epoch 38/150 856/856 [==============================] - 0s 132us/sample - loss: 1.0089 - acc: 0.5841 - val_loss: 0.8747 - val_acc: 0.6698 Epoch 39/150 856/856 [==============================] - 0s 145us/sample - loss: 1.0003 - acc: 0.5900 - val_loss: 0.8795 - val_acc: 0.6558 Epoch 40/150 856/856 [==============================] - 0s 129us/sample - loss: 0.9956 - acc: 0.5958 - val_loss: 0.8771 - val_acc: 0.6605 Epoch 41/150 856/856 [==============================] - 0s 119us/sample - loss: 0.9826 - acc: 0.6098 - val_loss: 0.8790 - val_acc: 0.6558 Epoch 42/150 856/856 [==============================] - 0s 125us/sample - loss: 0.9747 - acc: 0.6040 - val_loss: 0.8682 - val_acc: 0.6512 Epoch 43/150 856/856 [==============================] - 0s 127us/sample - loss: 0.9917 - acc: 0.5724 - val_loss: 0.8712 - val_acc: 0.6698 Epoch 44/150 856/856 [==============================] - 0s 125us/sample - loss: 0.9865 - acc: 0.5783 - val_loss: 0.8773 - val_acc: 0.6512 Epoch 45/150 856/856 [==============================] - 0s 136us/sample - loss: 0.9749 - acc: 0.5946 - val_loss: 0.8713 - val_acc: 0.6419 Epoch 46/150 856/856 [==============================] - 0s 138us/sample - loss: 0.9869 - acc: 0.5829 - val_loss: 0.8770 - val_acc: 0.6512 Epoch 47/150 856/856 [==============================] - 0s 133us/sample - loss: 0.9855 - acc: 0.6086 - val_loss: 0.8802 - val_acc: 0.6512 Epoch 48/150 856/856 [==============================] - 0s 126us/sample - loss: 0.9728 - acc: 0.6157 - val_loss: 0.8731 - val_acc: 0.6605 Epoch 49/150 856/856 [==============================] - 0s 127us/sample - loss: 0.9993 - acc: 0.5853 - val_loss: 0.8827 - val_acc: 0.6372 Epoch 50/150 856/856 [==============================] - 0s 127us/sample - loss: 0.9820 - acc: 0.5829 - val_loss: 0.8754 - val_acc: 0.6465 Epoch 51/150 856/856 [==============================] - 0s 161us/sample - loss: 0.9809 - acc: 0.5818 - val_loss: 0.8767 - val_acc: 0.6465 Epoch 52/150 856/856 
[==============================] - 0s 153us/sample - loss: 0.9749 - acc: 0.5841 - val_loss: 0.8717 - val_acc: 0.6605 Epoch 53/150 856/856 [==============================] - 0s 120us/sample - loss: 0.9896 - acc: 0.5829 - val_loss: 0.8788 - val_acc: 0.6326 Epoch 54/150 856/856 [==============================] - 0s 120us/sample - loss: 0.9595 - acc: 0.5958 - val_loss: 0.8742 - val_acc: 0.6465 Epoch 55/150 856/856 [==============================] - 0s 120us/sample - loss: 0.9602 - acc: 0.5970 - val_loss: 0.8718 - val_acc: 0.6605 Epoch 56/150 856/856 [==============================] - 0s 110us/sample - loss: 0.9819 - acc: 0.5829 - val_loss: 0.8699 - val_acc: 0.6512 Epoch 57/150 856/856 [==============================] - 0s 111us/sample - loss: 0.9743 - acc: 0.5970 - val_loss: 0.8721 - val_acc: 0.6512 Epoch 58/150 856/856 [==============================] - 0s 116us/sample - loss: 0.9678 - acc: 0.5911 - val_loss: 0.8646 - val_acc: 0.6558 Epoch 59/150 856/856 [==============================] - 0s 123us/sample - loss: 0.9820 - acc: 0.5958 - val_loss: 0.8675 - val_acc: 0.6605 Epoch 60/150 856/856 [==============================] - 0s 114us/sample - loss: 0.9694 - acc: 0.5888 - val_loss: 0.8735 - val_acc: 0.6465 Epoch 61/150 856/856 [==============================] - 0s 114us/sample - loss: 0.9825 - acc: 0.5771 - val_loss: 0.8709 - val_acc: 0.6558 Epoch 62/150 856/856 [==============================] - 0s 127us/sample - loss: 0.9681 - acc: 0.5888 - val_loss: 0.8697 - val_acc: 0.6512 Epoch 63/150 856/856 [==============================] - 0s 140us/sample - loss: 0.9670 - acc: 0.5923 - val_loss: 0.8739 - val_acc: 0.6326 Epoch 64/150 856/856 [==============================] - 0s 116us/sample - loss: 0.9857 - acc: 0.5900 - val_loss: 0.8758 - val_acc: 0.6605 Epoch 65/150 856/856 [==============================] - 0s 116us/sample - loss: 0.9751 - acc: 0.5911 - val_loss: 0.8686 - val_acc: 0.6605 Epoch 66/150 856/856 [==============================] - 0s 118us/sample - loss: 0.9737 - acc: 0.6040 - val_loss: 0.8712 - val_acc: 0.6651 Epoch 67/150 856/856 [==============================] - 0s 112us/sample - loss: 0.9617 - acc: 0.6016 - val_loss: 0.8722 - val_acc: 0.6651 Epoch 68/150 856/856 [==============================] - 0s 123us/sample - loss: 0.9901 - acc: 0.5888 - val_loss: 0.8727 - val_acc: 0.6698 Epoch 69/150 856/856 [==============================] - 0s 120us/sample - loss: 0.9455 - acc: 0.5958 - val_loss: 0.8702 - val_acc: 0.6512 Epoch 70/150 856/856 [==============================] - 0s 126us/sample - loss: 0.9436 - acc: 0.5993 - val_loss: 0.8644 - val_acc: 0.6605 Epoch 71/150 856/856 [==============================] - 0s 120us/sample - loss: 0.9690 - acc: 0.6005 - val_loss: 0.8692 - val_acc: 0.6605 Epoch 72/150 856/856 [==============================] - 0s 112us/sample - loss: 0.9507 - acc: 0.6215 - val_loss: 0.8663 - val_acc: 0.6651 Epoch 73/150 856/856 [==============================] - 0s 172us/sample - loss: 0.9816 - acc: 0.6028 - val_loss: 0.8687 - val_acc: 0.6558 Epoch 74/150 856/856 [==============================] - 0s 168us/sample - loss: 0.9722 - acc: 0.5841 - val_loss: 0.8696 - val_acc: 0.6698 Epoch 75/150 856/856 [==============================] - 0s 123us/sample - loss: 0.9752 - acc: 0.5853 - val_loss: 0.8747 - val_acc: 0.6419 Epoch 76/150 856/856 [==============================] - 0s 186us/sample - loss: 0.9656 - acc: 0.5900 - val_loss: 0.8708 - val_acc: 0.6558 Epoch 77/150 856/856 [==============================] - 0s 151us/sample - loss: 0.9600 - acc: 0.6121 - val_loss: 0.8715 - 
val_acc: 0.6605 Epoch 78/150 856/856 [==============================] - 0s 127us/sample - loss: 0.9562 - acc: 0.5935 - val_loss: 0.8730 - val_acc: 0.6465 Epoch 79/150 856/856 [==============================] - 0s 130us/sample - loss: 0.9792 - acc: 0.5783 - val_loss: 0.8710 - val_acc: 0.6465 Epoch 80/150 856/856 [==============================] - 0s 174us/sample - loss: 0.9624 - acc: 0.5981 - val_loss: 0.8685 - val_acc: 0.6512 Epoch 81/150 856/856 [==============================] - 0s 124us/sample - loss: 0.9531 - acc: 0.6145 - val_loss: 0.8728 - val_acc: 0.6512 Epoch 82/150 856/856 [==============================] - 0s 112us/sample - loss: 0.9591 - acc: 0.5900 - val_loss: 0.8737 - val_acc: 0.6465 Epoch 83/150 856/856 [==============================] - 0s 111us/sample - loss: 0.9731 - acc: 0.5981 - val_loss: 0.8842 - val_acc: 0.6512 Epoch 84/150 856/856 [==============================] - 0s 114us/sample - loss: 0.9606 - acc: 0.5888 - val_loss: 0.8727 - val_acc: 0.6465 Epoch 85/150 856/856 [==============================] - 0s 112us/sample - loss: 0.9357 - acc: 0.6005 - val_loss: 0.8658 - val_acc: 0.6465 Epoch 86/150 856/856 [==============================] - 0s 132us/sample - loss: 0.9690 - acc: 0.6040 - val_loss: 0.8674 - val_acc: 0.6512 Epoch 87/150 856/856 [==============================] - 0s 131us/sample - loss: 0.9645 - acc: 0.5958 - val_loss: 0.8681 - val_acc: 0.6605 Epoch 88/150 856/856 [==============================] - 0s 120us/sample - loss: 0.9596 - acc: 0.5864 - val_loss: 0.8719 - val_acc: 0.6605 Epoch 89/150 856/856 [==============================] - 0s 118us/sample - loss: 0.9593 - acc: 0.6110 - val_loss: 0.8715 - val_acc: 0.6512 Epoch 90/150 856/856 [==============================] - 0s 122us/sample - loss: 0.9555 - acc: 0.6110 - val_loss: 0.8717 - val_acc: 0.6558 Epoch 91/150 856/856 [==============================] - 0s 146us/sample - loss: 0.9570 - acc: 0.6075 - val_loss: 0.8722 - val_acc: 0.6419 Epoch 92/150 856/856 [==============================] - 0s 130us/sample - loss: 0.9488 - acc: 0.6028 - val_loss: 0.8730 - val_acc: 0.6419 Epoch 93/150 856/856 [==============================] - 0s 120us/sample - loss: 0.9499 - acc: 0.5970 - val_loss: 0.8615 - val_acc: 0.6419 Epoch 94/150 856/856 [==============================] - 0s 113us/sample - loss: 0.9408 - acc: 0.6098 - val_loss: 0.8602 - val_acc: 0.6558 Epoch 95/150 856/856 [==============================] - 0s 132us/sample - loss: 0.9630 - acc: 0.6051 - val_loss: 0.8668 - val_acc: 0.6512 Epoch 96/150 856/856 [==============================] - 0s 125us/sample - loss: 0.9642 - acc: 0.6121 - val_loss: 0.8704 - val_acc: 0.6558 Epoch 97/150 856/856 [==============================] - 0s 200us/sample - loss: 0.9589 - acc: 0.5935 - val_loss: 0.8699 - val_acc: 0.6512 Epoch 98/150 856/856 [==============================] - 0s 132us/sample - loss: 0.9424 - acc: 0.6005 - val_loss: 0.8649 - val_acc: 0.6558 Epoch 99/150 856/856 [==============================] - 0s 113us/sample - loss: 0.9492 - acc: 0.6005 - val_loss: 0.8702 - val_acc: 0.6558 Epoch 100/150 856/856 [==============================] - 0s 130us/sample - loss: 0.9386 - acc: 0.6168 - val_loss: 0.8691 - val_acc: 0.6558 Epoch 101/150 856/856 [==============================] - 0s 155us/sample - loss: 0.9396 - acc: 0.6238 - val_loss: 0.8610 - val_acc: 0.6605 Epoch 102/150 856/856 [==============================] - 0s 124us/sample - loss: 0.9517 - acc: 0.6133 - val_loss: 0.8652 - val_acc: 0.6558 Epoch 103/150 856/856 [==============================] - 0s 120us/sample - loss: 
0.9519 - acc: 0.6016 - val_loss: 0.8693 - val_acc: 0.6512 Epoch 104/150 856/856 [==============================] - 0s 138us/sample - loss: 0.9498 - acc: 0.6098 - val_loss: 0.8775 - val_acc: 0.6326 Epoch 105/150 856/856 [==============================] - 0s 140us/sample - loss: 0.9649 - acc: 0.6040 - val_loss: 0.8702 - val_acc: 0.6605 Epoch 106/150 856/856 [==============================] - 0s 136us/sample - loss: 0.9450 - acc: 0.6086 - val_loss: 0.8684 - val_acc: 0.6512 Epoch 107/150 856/856 [==============================] - 0s 138us/sample - loss: 0.9541 - acc: 0.6075 - val_loss: 0.8670 - val_acc: 0.6698 Epoch 108/150 856/856 [==============================] - 0s 129us/sample - loss: 0.9534 - acc: 0.6145 - val_loss: 0.8619 - val_acc: 0.6512 Epoch 109/150 856/856 [==============================] - 0s 113us/sample - loss: 0.9490 - acc: 0.5958 - val_loss: 0.8650 - val_acc: 0.6512 Epoch 110/150 856/856 [==============================] - 0s 152us/sample - loss: 0.9353 - acc: 0.5970 - val_loss: 0.8640 - val_acc: 0.6651 Epoch 111/150 856/856 [==============================] - 0s 161us/sample - loss: 0.9511 - acc: 0.6285 - val_loss: 0.8672 - val_acc: 0.6419 Epoch 112/150 856/856 [==============================] - 0s 117us/sample - loss: 0.9558 - acc: 0.6040 - val_loss: 0.8672 - val_acc: 0.6512 Epoch 113/150 856/856 [==============================] - 0s 120us/sample - loss: 0.9415 - acc: 0.5970 - val_loss: 0.8686 - val_acc: 0.6465 Epoch 114/150 856/856 [==============================] - 0s 117us/sample - loss: 0.9366 - acc: 0.5981 - val_loss: 0.8691 - val_acc: 0.6279 Epoch 115/150 856/856 [==============================] - 0s 120us/sample - loss: 0.9267 - acc: 0.6238 - val_loss: 0.8629 - val_acc: 0.6465 Epoch 116/150 856/856 [==============================] - 0s 114us/sample - loss: 0.9373 - acc: 0.6215 - val_loss: 0.8618 - val_acc: 0.6558 Epoch 117/150 856/856 [==============================] - 0s 120us/sample - loss: 0.9534 - acc: 0.6016 - val_loss: 0.8622 - val_acc: 0.6558 Epoch 118/150 856/856 [==============================] - 0s 131us/sample - loss: 0.9410 - acc: 0.5935 - val_loss: 0.8637 - val_acc: 0.6558 Epoch 119/150 856/856 [==============================] - 0s 140us/sample - loss: 0.9423 - acc: 0.6075 - val_loss: 0.8641 - val_acc: 0.6512 Epoch 120/150 856/856 [==============================] - 0s 130us/sample - loss: 0.9346 - acc: 0.6075 - val_loss: 0.8653 - val_acc: 0.6512 Epoch 121/150 856/856 [==============================] - 0s 138us/sample - loss: 0.9573 - acc: 0.5981 - val_loss: 0.8591 - val_acc: 0.6465 Epoch 122/150 856/856 [==============================] - 0s 140us/sample - loss: 0.9438 - acc: 0.6098 - val_loss: 0.8592 - val_acc: 0.6651 Epoch 123/150 856/856 [==============================] - 0s 117us/sample - loss: 0.9487 - acc: 0.6098 - val_loss: 0.8662 - val_acc: 0.6465 Epoch 124/150 856/856 [==============================] - 0s 116us/sample - loss: 0.9483 - acc: 0.5911 - val_loss: 0.8639 - val_acc: 0.6558 Epoch 125/150 856/856 [==============================] - 0s 125us/sample - loss: 0.9169 - acc: 0.6238 - val_loss: 0.8597 - val_acc: 0.6512 Epoch 126/150 856/856 [==============================] - 0s 155us/sample - loss: 0.9504 - acc: 0.5946 - val_loss: 0.8632 - val_acc: 0.6419 Epoch 127/150 856/856 [==============================] - 0s 131us/sample - loss: 0.9528 - acc: 0.6157 - val_loss: 0.8648 - val_acc: 0.6558 Epoch 128/150 856/856 [==============================] - 0s 118us/sample - loss: 0.9314 - acc: 0.6203 - val_loss: 0.8619 - val_acc: 0.6419 Epoch 129/150 856/856 
[==============================] - 0s 116us/sample - loss: 0.9409 - acc: 0.6063 - val_loss: 0.8705 - val_acc: 0.6512 Epoch 130/150 856/856 [==============================] - 0s 113us/sample - loss: 0.9437 - acc: 0.6110 - val_loss: 0.8646 - val_acc: 0.6651 Epoch 131/150 856/856 [==============================] - 0s 122us/sample - loss: 0.9211 - acc: 0.6110 - val_loss: 0.8608 - val_acc: 0.6558 Epoch 132/150 856/856 [==============================] - 0s 119us/sample - loss: 0.9344 - acc: 0.6040 - val_loss: 0.8660 - val_acc: 0.6512 Epoch 133/150 856/856 [==============================] - 0s 129us/sample - loss: 0.9382 - acc: 0.6005 - val_loss: 0.8674 - val_acc: 0.6558 Epoch 134/150 856/856 [==============================] - 0s 151us/sample - loss: 0.9451 - acc: 0.5923 - val_loss: 0.8673 - val_acc: 0.6465 Epoch 135/150 856/856 [==============================] - 0s 127us/sample - loss: 0.9412 - acc: 0.6192 - val_loss: 0.8658 - val_acc: 0.6419 Epoch 136/150 856/856 [==============================] - 0s 126us/sample - loss: 0.9436 - acc: 0.6215 - val_loss: 0.8679 - val_acc: 0.6465 Epoch 137/150 856/856 [==============================] - 0s 172us/sample - loss: 0.9433 - acc: 0.6110 - val_loss: 0.8732 - val_acc: 0.6512 Epoch 138/150 856/856 [==============================] - 0s 157us/sample - loss: 0.9337 - acc: 0.6063 - val_loss: 0.8646 - val_acc: 0.6465 Epoch 139/150 856/856 [==============================] - 0s 124us/sample - loss: 0.9507 - acc: 0.6016 - val_loss: 0.8694 - val_acc: 0.6605 Epoch 140/150 856/856 [==============================] - 0s 122us/sample - loss: 0.9213 - acc: 0.6273 - val_loss: 0.8667 - val_acc: 0.6558 Epoch 141/150 856/856 [==============================] - 0s 123us/sample - loss: 0.9226 - acc: 0.6121 - val_loss: 0.8718 - val_acc: 0.6512 Epoch 142/150 856/856 [==============================] - 0s 218us/sample - loss: 0.9379 - acc: 0.5935 - val_loss: 0.8704 - val_acc: 0.6558 Epoch 143/150 856/856 [==============================] - 0s 120us/sample - loss: 0.9350 - acc: 0.6051 - val_loss: 0.8728 - val_acc: 0.6512 Epoch 144/150 856/856 [==============================] - 0s 113us/sample - loss: 0.9227 - acc: 0.6250 - val_loss: 0.8671 - val_acc: 0.6651 Epoch 145/150 856/856 [==============================] - 0s 111us/sample - loss: 0.9487 - acc: 0.6157 - val_loss: 0.8681 - val_acc: 0.6512 Epoch 146/150 856/856 [==============================] - 0s 123us/sample - loss: 0.9183 - acc: 0.6168 - val_loss: 0.8679 - val_acc: 0.6465 Epoch 147/150 856/856 [==============================] - 0s 118us/sample - loss: 0.9373 - acc: 0.6121 - val_loss: 0.8706 - val_acc: 0.6465 Epoch 148/150 856/856 [==============================] - 0s 113us/sample - loss: 0.9354 - acc: 0.6110 - val_loss: 0.8701 - val_acc: 0.6419 Epoch 149/150 856/856 [==============================] - 0s 116us/sample - loss: 0.9287 - acc: 0.6297 - val_loss: 0.8675 - val_acc: 0.6512 Epoch 150/150 856/856 [==============================] - 0s 161us/sample - loss: 0.9290 - acc: 0.6040 - val_loss: 0.8718 - val_acc: 0.6465
print(history.history.keys())
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
dict_keys(['loss', 'acc', 'val_loss', 'val_acc'])
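The history keys above also include 'acc' and 'val_acc', so the same kind of curve can be drawn for accuracy; a minimal sketch mirroring the loss plot.
# Accuracy curves, analogous to the loss plot above
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='lower right')
plt.show()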
#y_pred = model.predict(X_test)
y_pred = model.predict_classes(X_test)
y_classes = [np.argmax(y, axis=None, out=None) for y in y_test]
y_train_classes = [np.argmax(y, axis=None, out=None) for y in y_train]
sns.countplot(y_train_classes)
<matplotlib.axes._subplots.AxesSubplot at 0x66f2ca08>
sns.countplot(y_classes)
<matplotlib.axes._subplots.AxesSubplot at 0x670d0f08>
sns.countplot(y_pred)
<matplotlib.axes._subplots.AxesSubplot at 0x69622388>
from sklearn.metrics import classification_report
# Note: classification_report expects (y_true, y_pred); the predictions are passed first here,
# so precision and recall are effectively swapped in the report below
print(classification_report(y_pred, y_classes))
| | precision | recall | f1-score | support |
|---|---|---|---|---|
| 3 | 0.00 | 0.00 | 0.00 | 0 |
| 4 | 0.00 | 0.00 | 0.00 | 0 |
| 5 | 0.75 | 0.62 | 0.68 | 262 |
| 6 | 0.46 | 0.52 | 0.49 | 189 |
| 7 | 0.53 | 0.48 | 0.50 | 77 |
| 8 | 0.00 | 0.00 | 0.00 | 0 |
| accuracy | | | 0.56 | 528 |
| macro avg | 0.29 | 0.27 | 0.28 | 528 |
| weighted avg | 0.61 | 0.56 | 0.59 | 528 |
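Per-class errors are easier to read from a confusion matrix than from the report alone; a minimal sketch using scikit-learn's confusion_matrix with the seaborn heatmap already available in this notebook.
# Confusion matrix of actual vs. predicted classes on the test split
from sklearn.metrics import confusion_matrix

labels = sorted(set(y_classes) | set(y_pred))
cm = confusion_matrix(y_classes, y_pred, labels=labels)
plt.figure(figsize=(8, 6))
sns.heatmap(cm, annot=True, fmt='d', xticklabels=labels, yticklabels=labels)
plt.xlabel('Predicted')
plt.ylabel('Actual')
plt.show()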
Observations: On the PCA features the network plateaus around 65% validation accuracy and about 56% accuracy on the held-out test split, and the minority classes (3, 4 and 8) are never predicted because of the class imbalance. Next we retrain on all 11 standardized features and balance the training set with SMOTE.
# Standardize (z-score) all predictors of the outlier-treated df1 before the second model
for column in df1.drop(columns=['Signal_Strength']).columns:
    df1[column] = df1[column].apply(lambda x:
                                    (x - df1[column].mean()) / df1[column].std()
                                    )
categorical_labels = to_categorical(df1['Signal_Strength'], num_classes=None)
X = df1.drop(columns=['Signal_Strength']).values
Y = categorical_labels
X_train, X_test, y_train, y_test = train_test_split(X, Y,test_size=0.33, random_state=42)
y_train_classes = [np.argmax(y, axis=None, out=None) for y in y_train]
from imblearn.over_sampling import SMOTE
# Named `smote` so the statsmodels alias `sm` imported above is not shadowed
smote = SMOTE(random_state=2)
X_train_res, y_train_res = smote.fit_sample(X_train, y_train_classes)
y_train_res = to_categorical(y_train_res,num_classes=None)
y_train_classes = [np.argmax(y, axis=None, out=None) for y in y_train_res]
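To confirm what SMOTE did, it helps to compare the class counts before and after resampling; a small sketch using collections.Counter on the integer class labels.
# Class distribution before vs. after SMOTE oversampling
from collections import Counter

print("before SMOTE:", sorted(Counter(np.argmax(y_train, axis=1)).items()))
print("after  SMOTE:", sorted(Counter(y_train_classes).items()))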
model1 = Sequential()
model1.add(Dense(20, input_dim=11, kernel_initializer='normal', activation='relu'))
model1.add(Dropout(0.25))
model1.add(Dense(20, activation='relu'))
model1.add(Dropout(0.25))
model1.add(Dense(20, activation='relu'))
model1.add(Dropout(0.25))
model1.add(Dense(9, activation='softmax'))
model1.summary()
Model: "sequential_23" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_77 (Dense) (None, 20) 240 _________________________________________________________________ dropout_54 (Dropout) (None, 20) 0 _________________________________________________________________ dense_78 (Dense) (None, 20) 420 _________________________________________________________________ dropout_55 (Dropout) (None, 20) 0 _________________________________________________________________ dense_79 (Dense) (None, 20) 420 _________________________________________________________________ dropout_56 (Dropout) (None, 20) 0 _________________________________________________________________ dense_80 (Dense) (None, 9) 189 ================================================================= Total params: 1,269 Trainable params: 1,269 Non-trainable params: 0 _________________________________________________________________
from tensorflow import keras
lr_schedule = keras.optimizers.schedules.ExponentialDecay(
initial_learning_rate=1e-2,
decay_steps=10000,
decay_rate=0.9)
sgd = keras.optimizers.SGD(learning_rate=lr_schedule)
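The ExponentialDecay schedule is callable, so its effect can be sanity-checked directly; a small sketch printing the learning rate after a few optimizer steps (with the default staircase=False the decay is continuous: lr = 0.01 * 0.9^(step / 10000)).
# Learning rate produced by the schedule at selected steps
for step in (0, 10_000, 20_000):
    print(step, float(lr_schedule(step)))  # 0.01, 0.009, 0.0081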
model1.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
# Note: validation_split takes the last 20% of rows without shuffling, and SMOTE appends its
# synthetic minority-class samples at the end, so this validation slice is not representative
# (which is why val_acc stays low in the log below)
history = model1.fit(X_train_res, y_train_res, epochs=200, batch_size=10, verbose=1, validation_split=0.2)
Train on 2227 samples, validate on 557 samples Epoch 1/200 2227/2227 [==============================] - 1s 393us/sample - loss: 2.0198 - acc: 0.2326 - val_loss: 2.5098 - val_acc: 0.1400 Epoch 2/200 2227/2227 [==============================] - 0s 154us/sample - loss: 1.7571 - acc: 0.3035 - val_loss: 3.0194 - val_acc: 0.0952 Epoch 3/200 2227/2227 [==============================] - 0s 161us/sample - loss: 1.5943 - acc: 0.3408 - val_loss: 3.5634 - val_acc: 0.0862 Epoch 4/200 2227/2227 [==============================] - 0s 156us/sample - loss: 1.4632 - acc: 0.3754 - val_loss: 4.1195 - val_acc: 0.0305 Epoch 5/200 2227/2227 [==============================] - 0s 154us/sample - loss: 1.3965 - acc: 0.4109 - val_loss: 4.4781 - val_acc: 0.1364 Epoch 6/200 2227/2227 [==============================] - 0s 154us/sample - loss: 1.3567 - acc: 0.4284 - val_loss: 4.5553 - val_acc: 0.1131 Epoch 7/200 2227/2227 [==============================] - 0s 159us/sample - loss: 1.3271 - acc: 0.4387 - val_loss: 4.7686 - val_acc: 0.1257 Epoch 8/200 2227/2227 [==============================] - 0s 177us/sample - loss: 1.2737 - acc: 0.4656 - val_loss: 4.9856 - val_acc: 0.1167 Epoch 9/200 2227/2227 [==============================] - 0s 167us/sample - loss: 1.2620 - acc: 0.4576 - val_loss: 5.0313 - val_acc: 0.1364 Epoch 10/200 2227/2227 [==============================] - 0s 163us/sample - loss: 1.2287 - acc: 0.4737 - val_loss: 5.0977 - val_acc: 0.1436 Epoch 11/200 2227/2227 [==============================] - 0s 178us/sample - loss: 1.2251 - acc: 0.4877 - val_loss: 5.1169 - val_acc: 0.1149 Epoch 12/200 2227/2227 [==============================] - 0s 172us/sample - loss: 1.2174 - acc: 0.4755 - val_loss: 4.9095 - val_acc: 0.1005 Epoch 13/200 2227/2227 [==============================] - 0s 158us/sample - loss: 1.2097 - acc: 0.4841 - val_loss: 4.6942 - val_acc: 0.1149 Epoch 14/200 2227/2227 [==============================] - 0s 178us/sample - loss: 1.2011 - acc: 0.4953 - val_loss: 4.7584 - val_acc: 0.1382 Epoch 15/200 2227/2227 [==============================] - 0s 172us/sample - loss: 1.1904 - acc: 0.4966 - val_loss: 4.5348 - val_acc: 0.1203 Epoch 16/200 2227/2227 [==============================] - 0s 153us/sample - loss: 1.1666 - acc: 0.5025 - val_loss: 4.4723 - val_acc: 0.1167 Epoch 17/200 2227/2227 [==============================] - 0s 161us/sample - loss: 1.1542 - acc: 0.5177 - val_loss: 4.4530 - val_acc: 0.0969 Epoch 18/200 2227/2227 [==============================] - 0s 181us/sample - loss: 1.1458 - acc: 0.5173 - val_loss: 4.4160 - val_acc: 0.1239 Epoch 19/200 2227/2227 [==============================] - 0s 165us/sample - loss: 1.1419 - acc: 0.5209 - val_loss: 4.3840 - val_acc: 0.0539 Epoch 20/200 2227/2227 [==============================] - 0s 153us/sample - loss: 1.1439 - acc: 0.5106 - val_loss: 4.2462 - val_acc: 0.1329 Epoch 21/200 2227/2227 [==============================] - 0s 188us/sample - loss: 1.1344 - acc: 0.5236 - val_loss: 4.0229 - val_acc: 0.1203 Epoch 22/200 2227/2227 [==============================] - 0s 155us/sample - loss: 1.1117 - acc: 0.5519 - val_loss: 4.0518 - val_acc: 0.0987 Epoch 23/200 2227/2227 [==============================] - 0s 167us/sample - loss: 1.1021 - acc: 0.5285 - val_loss: 4.1029 - val_acc: 0.1257 Epoch 24/200 2227/2227 [==============================] - 0s 180us/sample - loss: 1.1074 - acc: 0.5393 - val_loss: 3.9681 - val_acc: 0.1005 Epoch 25/200 2227/2227 [==============================] - 0s 185us/sample - loss: 1.0908 - acc: 0.5442 - val_loss: 3.8953 - val_acc: 0.1311 Epoch 26/200 
2227/2227 [==============================] - 0s 205us/sample - loss: 1.0908 - acc: 0.5384 - val_loss: 3.9189 - val_acc: 0.1131 Epoch 27/200 2227/2227 [==============================] - 0s 163us/sample - loss: 1.0895 - acc: 0.5465 - val_loss: 3.8725 - val_acc: 0.0880 Epoch 28/200 2227/2227 [==============================] - 0s 159us/sample - loss: 1.1105 - acc: 0.5415 - val_loss: 3.7097 - val_acc: 0.1203 Epoch 29/200 2227/2227 [==============================] - 0s 154us/sample - loss: 1.0705 - acc: 0.5550 - val_loss: 3.8420 - val_acc: 0.1329 Epoch 30/200 2227/2227 [==============================] - 0s 158us/sample - loss: 1.0690 - acc: 0.5528 - val_loss: 3.7532 - val_acc: 0.1185 Epoch 31/200 2227/2227 [==============================] - 0s 158us/sample - loss: 1.0459 - acc: 0.5510 - val_loss: 3.5311 - val_acc: 0.1239 Epoch 32/200 2227/2227 [==============================] - 0s 158us/sample - loss: 1.0632 - acc: 0.5608 - val_loss: 3.6312 - val_acc: 0.1185 Epoch 33/200 2227/2227 [==============================] - 0s 165us/sample - loss: 1.0537 - acc: 0.5577 - val_loss: 3.6048 - val_acc: 0.1149 Epoch 34/200 2227/2227 [==============================] - 0s 199us/sample - loss: 1.0656 - acc: 0.5604 - val_loss: 3.6391 - val_acc: 0.1257 Epoch 35/200 2227/2227 [==============================] - 0s 194us/sample - loss: 1.0508 - acc: 0.5644 - val_loss: 3.6019 - val_acc: 0.1239 Epoch 36/200 2227/2227 [==============================] - 0s 159us/sample - loss: 1.0360 - acc: 0.5667 - val_loss: 3.5419 - val_acc: 0.1203 Epoch 37/200 2227/2227 [==============================] - 0s 156us/sample - loss: 1.0261 - acc: 0.5626 - val_loss: 3.5454 - val_acc: 0.1167 Epoch 38/200 2227/2227 [==============================] - 0s 166us/sample - loss: 1.0434 - acc: 0.5617 - val_loss: 3.4077 - val_acc: 0.1275 Epoch 39/200 2227/2227 [==============================] - 0s 172us/sample - loss: 1.0130 - acc: 0.5779 - val_loss: 3.4967 - val_acc: 0.1131 Epoch 40/200 2227/2227 [==============================] - 0s 185us/sample - loss: 1.0233 - acc: 0.5757 - val_loss: 3.4735 - val_acc: 0.1095 Epoch 41/200 2227/2227 [==============================] - 0s 192us/sample - loss: 1.0138 - acc: 0.5824 - val_loss: 3.4408 - val_acc: 0.0916 Epoch 42/200 2227/2227 [==============================] - 0s 173us/sample - loss: 1.0139 - acc: 0.5680 - val_loss: 3.4815 - val_acc: 0.1059 Epoch 43/200 2227/2227 [==============================] - 0s 173us/sample - loss: 1.0163 - acc: 0.5828 - val_loss: 3.3293 - val_acc: 0.1203 Epoch 44/200 2227/2227 [==============================] - 0s 176us/sample - loss: 0.9944 - acc: 0.5815 - val_loss: 3.2040 - val_acc: 0.0844 Epoch 45/200 2227/2227 [==============================] - 0s 167us/sample - loss: 0.9983 - acc: 0.5698 - val_loss: 3.4153 - val_acc: 0.1167 Epoch 46/200 2227/2227 [==============================] - 0s 193us/sample - loss: 1.0038 - acc: 0.5802 - val_loss: 3.5126 - val_acc: 0.1185 Epoch 47/200 2227/2227 [==============================] - 0s 152us/sample - loss: 0.9993 - acc: 0.5739 - val_loss: 3.5727 - val_acc: 0.1113 Epoch 48/200 2227/2227 [==============================] - 0s 187us/sample - loss: 1.0013 - acc: 0.5828 - val_loss: 3.5892 - val_acc: 0.1203 Epoch 49/200 2227/2227 [==============================] - 0s 163us/sample - loss: 0.9540 - acc: 0.6022 - val_loss: 3.7001 - val_acc: 0.1203 Epoch 50/200 2227/2227 [==============================] - 0s 158us/sample - loss: 0.9870 - acc: 0.5963 - val_loss: 3.5774 - val_acc: 0.0557 Epoch 51/200 2227/2227 [==============================] - 0s 
163us/sample - loss: 0.9712 - acc: 0.5846 - val_loss: 3.5321 - val_acc: 0.0969 Epoch 52/200 2227/2227 [==============================] - 0s 155us/sample - loss: 0.9888 - acc: 0.5914 - val_loss: 3.3612 - val_acc: 0.0952 Epoch 53/200 2227/2227 [==============================] - 0s 153us/sample - loss: 0.9617 - acc: 0.5954 - val_loss: 3.3292 - val_acc: 0.0880 Epoch 54/200 2227/2227 [==============================] - 0s 157us/sample - loss: 0.9648 - acc: 0.5968 - val_loss: 3.4007 - val_acc: 0.1167 Epoch 55/200 2227/2227 [==============================] - 0s 148us/sample - loss: 0.9517 - acc: 0.6026 - val_loss: 3.4119 - val_acc: 0.1095 Epoch 56/200 2227/2227 [==============================] - 0s 190us/sample - loss: 0.9431 - acc: 0.6035 - val_loss: 3.4207 - val_acc: 0.1113 Epoch 57/200 2227/2227 [==============================] - 0s 181us/sample - loss: 0.9679 - acc: 0.5995 - val_loss: 3.4373 - val_acc: 0.1113 Epoch 58/200 2227/2227 [==============================] - 0s 173us/sample - loss: 0.9734 - acc: 0.6035 - val_loss: 3.3675 - val_acc: 0.0952 Epoch 59/200 2227/2227 [==============================] - 0s 180us/sample - loss: 0.9456 - acc: 0.6062 - val_loss: 3.4285 - val_acc: 0.1113 Epoch 60/200 2227/2227 [==============================] - 0s 176us/sample - loss: 0.9408 - acc: 0.6125 - val_loss: 3.4801 - val_acc: 0.1221 Epoch 61/200 2227/2227 [==============================] - 0s 190us/sample - loss: 0.9482 - acc: 0.6004 - val_loss: 3.3930 - val_acc: 0.0862 Epoch 62/200 2227/2227 [==============================] - 0s 167us/sample - loss: 0.9410 - acc: 0.5999 - val_loss: 3.2920 - val_acc: 0.1023 Epoch 63/200 2227/2227 [==============================] - 0s 172us/sample - loss: 0.9443 - acc: 0.6031 - val_loss: 3.3909 - val_acc: 0.1149 Epoch 64/200 2227/2227 [==============================] - 0s 186us/sample - loss: 0.9182 - acc: 0.6102 - val_loss: 3.4333 - val_acc: 0.1095 Epoch 65/200 2227/2227 [==============================] - 0s 196us/sample - loss: 0.9334 - acc: 0.6062 - val_loss: 3.4077 - val_acc: 0.1167 Epoch 66/200 2227/2227 [==============================] - 1s 234us/sample - loss: 0.9251 - acc: 0.6075 - val_loss: 3.4393 - val_acc: 0.1203 Epoch 67/200 2227/2227 [==============================] - 0s 175us/sample - loss: 0.9213 - acc: 0.6170 - val_loss: 3.4204 - val_acc: 0.1149 Epoch 68/200 2227/2227 [==============================] - 0s 165us/sample - loss: 0.9229 - acc: 0.6089 - val_loss: 3.2635 - val_acc: 0.0898 Epoch 69/200 2227/2227 [==============================] - 0s 172us/sample - loss: 0.9226 - acc: 0.6111 - val_loss: 3.3604 - val_acc: 0.1185 Epoch 70/200 2227/2227 [==============================] - 0s 170us/sample - loss: 0.9172 - acc: 0.6044 - val_loss: 3.3547 - val_acc: 0.1257 Epoch 71/200 2227/2227 [==============================] - 0s 152us/sample - loss: 0.9252 - acc: 0.6170 - val_loss: 3.3088 - val_acc: 0.1131 Epoch 72/200 2227/2227 [==============================] - 0s 170us/sample - loss: 0.9058 - acc: 0.6273 - val_loss: 3.2979 - val_acc: 0.1239 Epoch 73/200 2227/2227 [==============================] - 0s 166us/sample - loss: 0.8998 - acc: 0.6264 - val_loss: 3.2219 - val_acc: 0.1239 Epoch 74/200 2227/2227 [==============================] - 0s 180us/sample - loss: 0.9185 - acc: 0.6107 - val_loss: 3.1362 - val_acc: 0.1095 Epoch 75/200 2227/2227 [==============================] - 0s 181us/sample - loss: 0.8987 - acc: 0.6318 - val_loss: 3.1706 - val_acc: 0.1167 Epoch 76/200 2227/2227 [==============================] - 0s 159us/sample - loss: 0.9031 - acc: 0.6206 - val_loss: 
3.2503 - val_acc: 0.1131 [Epochs 77-200: training loss falls from 0.8877 to 0.7658 and training accuracy rises from 0.6228 to about 0.67, while validation loss stays between roughly 2.6 and 3.4 and validation accuracy between roughly 0.09 and 0.14. Final epoch (200/200): loss: 0.7658 - acc: 0.6731 - val_loss: 2.7771 - val_acc: 0.1221]
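The log shows the network still fitting the training split (accuracy climbing past 0.67) while validation accuracy stays near 0.12, a clear sign of overfitting. A hedged sketch of early stopping, reusing the tensorflow.python.keras namespace from the imports at the top; the epoch count, batch size and validation split in the commented call are assumptions, and model1 would need to be rebuilt and recompiled before refitting.
from tensorflow.python.keras.callbacks import EarlyStopping
# stop once val_loss has not improved for 10 epochs and keep the best weights seen
early_stop = EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True)
# history = model1.fit(X_train, y_train, epochs=200, batch_size=10, verbose=1,
#                      validation_split=0.2, callbacks=[early_stop])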
print(history.history.keys())
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
dict_keys(['loss', 'acc', 'val_loss', 'val_acc'])
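The printed keys show that history also records 'acc' and 'val_acc', so the accuracy curves can be plotted the same way; a small companion sketch:
# plot training vs. validation accuracy from the same history object
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()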
y_pred = model1.predict_classes(X_test)
y_classes = [np.argmax(y, axis=None, out=None) for y in y_test]
from sklearn.metrics import classification_report
print(classification_report(y_pred, y_classes))  # caution: classification_report expects (y_true, y_pred); with the arguments in this order the precision and recall columns are interchanged and 'support' counts predictions rather than true labels
precision recall f1-score support
3 0.00 0.00 0.00 5
4 0.47 0.09 0.15 100
5 0.47 0.66 0.55 156
6 0.41 0.52 0.46 168
7 0.61 0.43 0.51 99
8 0.00 0.00 0.00 0
accuracy 0.46 528
macro avg 0.33 0.28 0.28 528
weighted avg 0.48 0.46 0.43 528
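A confusion matrix over the same arrays (a small sketch) shows directly which Signal_Strength scores the network confuses:
from sklearn.metrics import confusion_matrix
# rows follow the first argument (actual scores), columns the second (predicted scores),
# both ordered by the sorted set of labels
print(confusion_matrix(y_classes, y_pred))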
# z-score each selected feature, column by column
for column in Kbest_features.drop(columns=['Signal_Strength']).columns:
    Kbest_features[column] = Kbest_features[column].apply(
        lambda x: (x - Kbest_features[column].mean()) / Kbest_features[column].std()
    )
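The loop above recomputes the mean and std for every element; an equivalent vectorised sketch uses the StandardScaler imported at the top of the notebook (note it standardises with the population std, ddof=0, so values differ marginally from pandas' .std()):
feature_cols = Kbest_features.columns.drop('Signal_Strength')
scaler = StandardScaler()
Kbest_features[feature_cols] = scaler.fit_transform(Kbest_features[feature_cols])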
categorical_labels = to_categorical(Kbest_features['Signal_Strength'], num_classes=None)
X = Kbest_features.drop(columns=['Signal_Strength']).values
Y = categorical_labels
X_train, X_test, y_train, y_test = train_test_split(X, Y,test_size=0.33, random_state=42)
model_k = Sequential()
model_k.add(Dense(32, input_dim=8, kernel_initializer='normal', activation='relu'))
model_k.add(Dropout(0.5))
model_k.add(Dense(64, activation='relu'))
model_k.add(Dropout(0.5))
model_k.add(Dense(9, activation='softmax'))  # 9 output columns: to_categorical on the 3-8 scores produces indices 0-8
model_k.summary()
Model: "sequential_24" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_81 (Dense) (None, 32) 288 _________________________________________________________________ dropout_57 (Dropout) (None, 32) 0 _________________________________________________________________ dense_82 (Dense) (None, 64) 2112 _________________________________________________________________ dropout_58 (Dropout) (None, 64) 0 _________________________________________________________________ dense_83 (Dense) (None, 9) 585 ================================================================= Total params: 2,985 Trainable params: 2,985 Non-trainable params: 0 _________________________________________________________________
model_k.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
history = model_k.fit(X_train, y_train, epochs=150, batch_size=10, verbose=1, validation_split=0.2)
Train on 856 samples, validate on 215 samples [Epochs 1-150: training loss falls from 1.9172 to 0.9221 and accuracy rises from 0.3832 to about 0.61; validation loss settles around 0.85 and validation accuracy around 0.67-0.71 after roughly 40 epochs. Final epoch (150/150): loss: 0.9221 - acc: 0.6086 - val_loss: 0.8534 - val_acc: 0.6744]
print(history.history.keys())
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
dict_keys(['loss', 'acc', 'val_loss', 'val_acc'])
y_pred = model_k.predict_classes(X_test)
y_classes = [np.argmax(y, axis=None, out=None) for y in y_test]
from sklearn.metrics import classification_report
print(classification_report(y_pred, y_classes))  # same reversed (y_pred, y_true) argument order as above
precision recall f1-score support
3 0.00 0.00 0.00 0
4 0.00 0.00 0.00 0
5 0.80 0.62 0.70 280
6 0.47 0.54 0.50 188
7 0.39 0.45 0.42 60
8 0.00 0.00 0.00 0
accuracy 0.57 528
macro avg 0.28 0.27 0.27 528
weighted avg 0.64 0.57 0.60 528
Conclusion:
df2 = df1.copy()
def categorise_signal(row):
    if row >= 7:
        return 3
    elif row <= 4:
        return 1
    else:
        return 2
df2['Signal_Strength'] = df2['Signal_Strength'].apply(categorise_signal)
df2['Signal_Strength'].value_counts()
2    1319
3     217
1      63
Name: Signal_Strength, dtype: int64
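The same three-way binning can be expressed with pd.cut; a sketch, assuming df1 still holds the raw 3-8 scores, with bin edges chosen to reproduce categorise_signal:
# (-inf, 4] -> 1, (4, 6] -> 2, (6, inf) -> 3, matching the function above
df2['Signal_Strength'] = pd.cut(df1['Signal_Strength'].astype(float),
                                bins=[-np.inf, 4, 6, np.inf],
                                labels=[1, 2, 3]).astype(int)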
sns.countplot(y='Signal_Strength',hue='Signal_Strength', data=df2)
[countplot: counts of the three Signal_Strength categories in df2]
def create_model(input_dim, output_dim):
    model = Sequential()
    model.add(Dense(32, input_dim=input_dim, kernel_initializer='normal', activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(64, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(output_dim, activation='softmax'))
    print(model.summary())
    return model
finalDf['Signal_Strength'] = finalDf['Signal_Strength'].apply(categorise_signal)
sns.countplot(y='Signal_Strength',hue='Signal_Strength', data=finalDf)
[countplot: counts of the three Signal_Strength categories in finalDf]
from keras.utils.np_utils import to_categorical
categorical_labels = to_categorical(finalDf['Signal_Strength'], num_classes=None)
xpca = finalDf.drop(columns=['Signal_Strength']).values
ypca = categorical_labels
X_train, X_test, y_train, y_test = train_test_split(xpca, ypca,test_size=0.33, random_state=42)
y_train_classes = [np.argmax(y, axis=None, out=None) for y in y_train]
from imblearn.over_sampling import SMOTE
smote = SMOTE(random_state=2)  # named smote to avoid shadowing the statsmodels alias sm
X_train_res, y_train_res = smote.fit_sample(X_train, y_train_classes)  # fit_sample is the older imblearn API (fit_resample in newer releases)
y_train_res = to_categorical(y_train_res,num_classes=None)
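A quick sanity check (sketch) that the resampled labels are balanced before they are fed to the network:
import collections
# y_train_res is one-hot again at this point, so argmax recovers the class index per row
print(collections.Counter(np.argmax(y_train_res, axis=1)))  # expect roughly equal counts for classes 1, 2 and 3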
model = create_model(6, 4)  # 6 input features from finalDf; 4 output units because to_categorical on labels 1-3 yields columns 0-3
Model: "sequential_27" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_90 (Dense) (None, 32) 224 _________________________________________________________________ dropout_63 (Dropout) (None, 32) 0 _________________________________________________________________ dense_91 (Dense) (None, 64) 2112 _________________________________________________________________ dropout_64 (Dropout) (None, 64) 0 _________________________________________________________________ dense_92 (Dense) (None, 4) 260 ================================================================= Total params: 2,596 Trainable params: 2,596 Non-trainable params: 0 _________________________________________________________________ None
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
history = model.fit(X_train, y_train, epochs=150, batch_size=10, verbose=1, validation_split=0.2)
Train on 856 samples, validate on 215 samples [Epochs 1-150: training loss falls from 1.1546 to 0.3915 and accuracy rises from 0.6589 to about 0.86; validation loss stabilises near 0.39 and validation accuracy around 0.83-0.87 within the first 30 epochs. Final epoch (150/150): loss: 0.3915 - acc: 0.8575 - val_loss: 0.3948 - val_acc: 0.8465]
print(history.history.keys())
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
dict_keys(['loss', 'acc', 'val_loss', 'val_acc'])
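The history object also records accuracy under the 'acc' and 'val_acc' keys printed above; as an optional sketch (not part of the original run), those curves can be plotted the same way as the loss:
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='lower right')
plt.show()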
# Class indices predicted by the network for the held-out test set
y_pred = model.predict_classes(X_test)
# Convert the one-hot encoded test labels back to integer class indices
y_classes = [np.argmax(y) for y in y_test]
# Note: the predictions are passed as the first argument here, so the "support"
# column in the report counts how often each class was predicted
print(classification_report(y_pred, y_classes))
precision recall f1-score support
1 0.00 0.00 0.00 0
2 0.94 0.85 0.89 474
3 0.36 0.52 0.43 54
accuracy 0.82 528
macro avg 0.43 0.46 0.44 528
weighted avg 0.88 0.82 0.85 528
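The report above is easier to interpret next to a confusion matrix; a minimal sketch, assuming sklearn's confusion_matrix is available and reusing the y_pred and y_classes computed above:
from sklearn.metrics import confusion_matrix
# Rows are the true classes (y_classes), columns the predicted classes (y_pred)
print(confusion_matrix(y_classes, y_pred))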
model_s = create_model(6,4)
Model: "sequential_30" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_99 (Dense) (None, 32) 224 _________________________________________________________________ dropout_69 (Dropout) (None, 32) 0 _________________________________________________________________ dense_100 (Dense) (None, 64) 2112 _________________________________________________________________ dropout_70 (Dropout) (None, 64) 0 _________________________________________________________________ dense_101 (Dense) (None, 4) 260 ================================================================= Total params: 2,596 Trainable params: 2,596 Non-trainable params: 0 _________________________________________________________________ None
model_s.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
history = model_s.fit(X_train_res, y_train_res, epochs=150, batch_size=10, verbose=1, validation_split=0.2)
Train on 2133 samples, validate on 534 samples
Epoch 1/150 2133/2133 [==============================] - 1s 410us/sample - loss: 1.0997 - acc: 0.4552 - val_loss: 1.0309 - val_acc: 0.6461
[... Epochs 2-149 omitted: training loss drifts down from about 0.81 to about 0.50, while val_acc fluctuates, mostly between 0.72 and 0.84 ...]
Epoch 150/150 2133/2133 [==============================] - 0s 206us/sample - loss: 0.4950 - acc: 0.7806 - val_loss: 0.7064 - val_acc: 0.7434
print(history.history.keys())
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
dict_keys(['loss', 'acc', 'val_loss', 'val_acc'])
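The validation loss in the curves above oscillates for much of the 150 epochs; one optional refinement, not used in this run, is Keras' EarlyStopping callback. A minimal sketch with hypothetical settings, following the same tensorflow.python.keras import style the notebook already uses (the public tensorflow.keras.callbacks path works as well):
from tensorflow.python.keras.callbacks import EarlyStopping
# Stop once val_loss has not improved for 15 epochs; restore_best_weights needs a reasonably recent TF release
early_stop = EarlyStopping(monitor='val_loss', patience=15, restore_best_weights=True)
# Same fit call as above but with the callback attached (in practice the model would be re-created before re-fitting)
history = model_s.fit(X_train_res, y_train_res, epochs=150, batch_size=10, verbose=1, validation_split=0.2, callbacks=[early_stop])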
y_pred = model_s.predict_classes(X_test)
y_classes = [np.argmax(y, axis=None, out=None) for y in y_test]
print(classification_report(y_pred,y_classes))
precision recall f1-score support
1 0.52 0.09 0.16 119
2 0.67 0.89 0.76 322
3 0.55 0.48 0.51 87
accuracy 0.64 528
macro avg 0.58 0.49 0.48 528
weighted avg 0.61 0.64 0.58 528
Kbest_features['Signal_Strength'] = Kbest_features['Signal_Strength'].apply(categorise_signal)
categorical_labels = to_categorical(Kbest_features['Signal_Strength'], num_classes=None)
X = Kbest_features.drop(columns=['Signal_Strength']).values
Y = categorical_labels
X_train, X_test, y_train, y_test = train_test_split(X, Y,test_size=0.33, random_state=42)
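Because categorise_signal maps Signal_Strength onto the labels 1-3 that appear in the reports above, to_categorical with num_classes=None yields four columns with index 0 unused, which is why create_model is given 4 output units in the next cell. A quick sanity check:
print(categorical_labels.shape)                      # expected (n_rows, 4): one column per label 0-3, column 0 unused
print(np.unique(Kbest_features['Signal_Strength']))  # expected to show only 1, 2 and 3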
model_k = create_model(8,4)
Model: "sequential_31" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_102 (Dense) (None, 32) 288 _________________________________________________________________ dropout_71 (Dropout) (None, 32) 0 _________________________________________________________________ dense_103 (Dense) (None, 64) 2112 _________________________________________________________________ dropout_72 (Dropout) (None, 64) 0 _________________________________________________________________ dense_104 (Dense) (None, 4) 260 ================================================================= Total params: 2,660 Trainable params: 2,660 Non-trainable params: 0 _________________________________________________________________ None
model_k.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
history = model_k.fit(X_train, y_train, epochs=150, batch_size=10, verbose=1, validation_split=0.2)
Train on 856 samples, validate on 215 samples
Epoch 1/150 856/856 [==============================] - 1s 860us/sample - loss: 1.0180 - acc: 0.7547 - val_loss: 0.6404 - val_acc: 0.8326
[... Epochs 2-149 omitted: training loss falls from about 0.59 to about 0.38 and val_acc settles around 0.85-0.87 ...]
Epoch 150/150 856/856 [==============================] - 0s 168us/sample - loss: 0.3631 - acc: 0.8668 - val_loss: 0.3855 - val_acc: 0.8605
y_pred = model_k.predict_classes(X_test)
y_classes = [np.argmax(y, axis=None, out=None) for y in y_test]
print(classification_report(y_pred,y_classes))
precision recall f1-score support
1 0.00 0.00 0.00 0
2 0.94 0.84 0.89 482
3 0.27 0.46 0.34 46
accuracy 0.81 528
macro avg 0.40 0.43 0.41 528
weighted avg 0.88 0.81 0.84 528
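Besides the classification report, the compiled model reports its overall test loss and accuracy directly; a minimal sketch on the same Kbest train/test split:
test_loss, test_acc = model_k.evaluate(X_test, y_test, verbose=0)
print('test loss: %.4f  test accuracy: %.4f' % (test_loss, test_acc))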
df2 = df1.copy()
df2['Signal_Strength'] = df2['Signal_Strength'].apply(categorise_signal)
categorical_labels = to_categorical(df2['Signal_Strength'], num_classes=None)
X = df2.drop(columns=['Signal_Strength']).values
Y = categorical_labels
X_train, X_test, y_train, y_test = train_test_split(X, Y,test_size=0.33, random_state=42)
model1 = create_model(11,4)
Model: "sequential_33" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_108 (Dense) (None, 32) 384 _________________________________________________________________ dropout_75 (Dropout) (None, 32) 0 _________________________________________________________________ dense_109 (Dense) (None, 64) 2112 _________________________________________________________________ dropout_76 (Dropout) (None, 64) 0 _________________________________________________________________ dense_110 (Dense) (None, 4) 260 ================================================================= Total params: 2,756 Trainable params: 2,756 Non-trainable params: 0 _________________________________________________________________ None
model1.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
history = model1.fit(X_train, y_train, epochs=150, batch_size=10, verbose=1, validation_split=0.2)
Train on 856 samples, validate on 215 samples
Epoch 1/150 856/856 [==============================] - 1s 895us/sample - loss: 1.2093 - acc: 0.6671 - val_loss: 0.5764 - val_acc: 0.8326
[... Epochs 2-128 omitted: training loss falls from about 0.79 to about 0.48 while val_acc stays near 0.83 ...]
Epoch 129/150 856/856
[==============================] - 0s 190us/sample - loss: 0.4873 - acc: 0.8388 - val_loss: 0.4505 - val_acc: 0.8326 Epoch 130/150 856/856 [==============================] - 0s 178us/sample - loss: 0.4887 - acc: 0.8435 - val_loss: 0.4495 - val_acc: 0.8326 Epoch 131/150 856/856 [==============================] - 0s 175us/sample - loss: 0.4781 - acc: 0.8341 - val_loss: 0.4586 - val_acc: 0.8326 Epoch 132/150 856/856 [==============================] - 0s 180us/sample - loss: 0.4935 - acc: 0.8329 - val_loss: 0.4487 - val_acc: 0.8326 Epoch 133/150 856/856 [==============================] - 0s 179us/sample - loss: 0.4774 - acc: 0.8306 - val_loss: 0.4425 - val_acc: 0.8326 Epoch 134/150 856/856 [==============================] - 0s 187us/sample - loss: 0.4738 - acc: 0.8376 - val_loss: 0.4473 - val_acc: 0.8326 Epoch 135/150 856/856 [==============================] - 0s 242us/sample - loss: 0.4747 - acc: 0.8388 - val_loss: 0.4537 - val_acc: 0.8326 Epoch 136/150 856/856 [==============================] - 0s 180us/sample - loss: 0.5033 - acc: 0.8376 - val_loss: 0.4631 - val_acc: 0.8326 Epoch 137/150 856/856 [==============================] - 0s 183us/sample - loss: 0.4746 - acc: 0.8364 - val_loss: 0.4636 - val_acc: 0.8326 Epoch 138/150 856/856 [==============================] - 0s 180us/sample - loss: 0.4681 - acc: 0.8481 - val_loss: 0.4366 - val_acc: 0.8326 Epoch 139/150 856/856 [==============================] - 0s 193us/sample - loss: 0.4691 - acc: 0.8435 - val_loss: 0.4447 - val_acc: 0.8372 Epoch 140/150 856/856 [==============================] - 0s 200us/sample - loss: 0.4810 - acc: 0.8376 - val_loss: 0.4403 - val_acc: 0.8326 Epoch 141/150 856/856 [==============================] - 0s 173us/sample - loss: 0.4791 - acc: 0.8306 - val_loss: 0.4359 - val_acc: 0.8326 Epoch 142/150 856/856 [==============================] - 0s 200us/sample - loss: 0.4785 - acc: 0.8388 - val_loss: 0.4355 - val_acc: 0.8326 Epoch 143/150 856/856 [==============================] - 0s 195us/sample - loss: 0.4879 - acc: 0.8294 - val_loss: 0.4426 - val_acc: 0.8326 Epoch 144/150 856/856 [==============================] - 0s 180us/sample - loss: 0.4765 - acc: 0.8411 - val_loss: 0.4473 - val_acc: 0.8326 Epoch 145/150 856/856 [==============================] - 0s 192us/sample - loss: 0.4628 - acc: 0.8446 - val_loss: 0.4467 - val_acc: 0.8326 Epoch 146/150 856/856 [==============================] - 0s 186us/sample - loss: 0.4795 - acc: 0.8318 - val_loss: 0.4558 - val_acc: 0.8326 Epoch 147/150 856/856 [==============================] - 0s 172us/sample - loss: 0.4772 - acc: 0.8353 - val_loss: 0.4514 - val_acc: 0.8372 Epoch 148/150 856/856 [==============================] - 0s 178us/sample - loss: 0.4843 - acc: 0.8364 - val_loss: 0.4453 - val_acc: 0.8326 Epoch 149/150 856/856 [==============================] - 0s 179us/sample - loss: 0.4652 - acc: 0.8376 - val_loss: 0.4437 - val_acc: 0.8372 Epoch 150/150 856/856 [==============================] - 0s 195us/sample - loss: 0.4708 - acc: 0.8388 - val_loss: 0.4371 - val_acc: 0.8326
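The validation accuracy above plateaus at roughly 0.83 long before epoch 150, while the training loss only drifts down slowly. As an illustrative alternative to a fixed 150-epoch run (this is not the original fit call), an EarlyStopping callback can halt training once val_loss stops improving. A minimal sketch; the names model1, X_train and y_train, the one-hot encoding of y_train, and the batch size are assumptions based on the surrounding cells.
from tensorflow.python.keras.callbacks import EarlyStopping
# Hypothetical re-run: stop once validation loss has not improved for 10 consecutive epochs
early_stop = EarlyStopping(monitor='val_loss', patience=10)
history = model1.fit(X_train, y_train,
                     validation_split=0.2,
                     epochs=150, batch_size=32,
                     callbacks=[early_stop], verbose=1)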
from sklearn.metrics import classification_report

y_pred = model1.predict_classes(X_test)      # predicted class indices for the test set
y_classes = [np.argmax(y) for y in y_test]   # one-hot test labels back to class indices
# the predictions are passed first here, so the "support" column below counts predicted classes
print(classification_report(y_pred, y_classes))
| | precision | recall | f1-score | support |
|---|---|---|---|---|
| 1 | 0.00 | 0.00 | 0.00 | 0 |
| 2 | 1.00 | 0.81 | 0.90 | 528 |
| 3 | 0.00 | 0.00 | 0.00 | 0 |
| accuracy | | | 0.81 | 528 |
| macro avg | 0.33 | 0.27 | 0.30 | 528 |
| weighted avg | 1.00 | 0.81 | 0.90 | 528 |
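The report has non-zero support only for class 2: all 528 test samples are assigned to that class, which is why precision and recall collapse to zero for classes 1 and 3. A confusion matrix makes this explicit; a minimal sketch, reusing y_classes and y_pred from the cell above and importing scikit-learn's confusion_matrix, which is not loaded elsewhere in this notebook.
from sklearn.metrics import confusion_matrix
# Rows = true classes, columns = predicted classes; a single populated
# column confirms that the network predicts only one class.
print(confusion_matrix(y_classes, y_pred))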
Final Conclusion:
# Persist the trained network (architecture + weights) to an HDF5 file for later reuse
model.save("CatModelTrainedOnPC.h5")
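To check that the saved file is usable, it can be loaded back and re-evaluated on the held-out data. A minimal sketch, assuming X_test and the one-hot y_test are still in memory and that the saved model was compiled with an accuracy metric.
from tensorflow.python.keras.models import load_model
# Reload the persisted network and confirm it still evaluates on the test set
restored = load_model("CatModelTrainedOnPC.h5")
loss, acc = restored.evaluate(X_test, y_test, verbose=0)
print("Restored model - loss: %.4f, acc: %.4f" % (loss, acc))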